\[E[AR(1)] = \frac{\beta_0}{1-\beta_1}\]
a i. \(\mu_1 = 0\)
\(\mu_2 = 0\)
\(\mu_3 = 0\)
\(\mu_4 = 0\)
\(\mu_5 = 10\)
b
# Simulate five AR(1) series y_t = beta0 + beta1 * y_{t-1} + e_t.
# genAR_1 is defined elsewhere in the document; presumably its arguments
# are (beta0, beta1, n, sigma) -- TODO confirm against its definition.
ar1 <- genAR_1(0,0,200,1)    # beta1 = 0: white noise around 0
ar2 <- genAR_1(0,.9,200,1)   # highly persistent, mean 0
ar3 <- genAR_1(0,.1,200,1)   # weakly persistent, mean 0
ar4 <- genAR_1(0,-.5,200,1)  # oscillating (negative beta1), mean 0
ar5 <- genAR_1(1,.9,200,1)   # mean beta0/(1-beta1) = 1/0.1 = 10
# Overlay all five series on one plot, distinguished by color.
plot(ar1, type = "l", ylim = c(-20,20))
lines(ar2, col = rainbow(4, alpha = .5)[1])
lines(ar3, col = rainbow(4, alpha = .5)[2])
lines(ar4, col = rainbow(4, alpha = .5)[3])
lines(ar5, col = rainbow(4, alpha = .5)[4])
Yes, the plots look as I would expect: each series is centered around its respective mean, and its “swings” are governed by its \(\beta_1\).
c
Yes, all of the series appear to be mean reverting, series 1-4 revert to 0, series 5 reverts to 10.
d
pr <- function(p) {
  # Print the point forecast stored in a predict()-style result
  # (the `pred` component); returns it invisibly via print().
  point_forecast <- p[["pred"]]
  print(point_forecast)
}
# Fit an ARIMA(1,0,0) (AR(1) with intercept) to each simulated series
# and compute the one-step-ahead forecast.
p1 <- predict(arima(ar1, order = c(1,0,0)), n.ahead = 1)
p2 <- predict(arima(ar2, order = c(1,0,0)), n.ahead = 1)
p3 <- predict(arima(ar3, order = c(1,0,0)), n.ahead = 1)
p4 <- predict(arima(ar4, order = c(1,0,0)), n.ahead = 1)
p5 <- predict(arima(ar5, order = c(1,0,0)), n.ahead = 1)
# mapply is used here only for its printing side effect.
mapply(pr, list(p1, p2, p3, p4, p5))
## Time Series:
## Start = 201
## End = 201
## Frequency = 1
## [1] 0.07059189
## Time Series:
## Start = 201
## End = 201
## Frequency = 1
## [1] 0.3189127
## Time Series:
## Start = 201
## End = 201
## Frequency = 1
## [1] -0.05395836
## Time Series:
## Start = 201
## End = 201
## Frequency = 1
## [1] 0.1817178
## Time Series:
## Start = 201
## End = 201
## Frequency = 1
## [1] 6.258797
## [1] 0.07059189 0.31891269 -0.05395836 0.18171775 6.25879667
e
oneStep <- function(beta0, beta1, yt) {
  # Simulate one step of an AR(1): y_{t+1} = beta0 + beta1 * y_t + e,
  # where e is a single standard-normal draw.
  innovation <- rnorm(1)
  beta0 + beta1 * yt + innovation
}
# "Manual" one-step forecasts: plug the last observation (y_200) of each
# series into y_{t+1} = beta0 + beta1 * y_t + e, using the true parameters.
arimas <- data.frame(beta0 = c(0,0,0,0,1), beta1 = c(0,.9, .1, -.5, .9), yt = c(ar1[200], ar2[200], ar3[200], ar4[200], ar5[200]))
mapply(oneStep, arimas$beta0, arimas$beta1, arimas$yt)
## [1] -1.1867108 2.3561288 1.1144438 0.4214035 7.5519689
f
myCI <- function(p) {
  # Build an approximate 95% forecast interval from a predict() result:
  # point forecast +/- 2 standard errors.
  half_width <- 2 * p$se
  data.frame(
    forecast = p$pred,
    upr = p$pred + half_width,
    lwr = p$pred - half_width
  )
}
mapply(myCI, list(p1, p2, p3, p4, p5))
## [,1] [,2] [,3] [,4] [,5]
## forecast 0.07059189 0.3189127 -0.05395836 0.1817178 6.258797
## upr 2.078116 2.291878 1.993294 1.978698 8.290388
## lwr -1.936932 -1.654052 -2.101211 -1.615262 4.227205
g
resAR <- function(ar) {
  # Fit an AR(1)-with-intercept model (ARIMA(1,0,0)) to the series and
  # return its residuals.
  fit <- arima(ar, order = c(1, 0, 0))
  resid(fit)
}
# Residuals from an AR(1) fit to each of the five series; mapply returns
# them as the columns of a 200 x 5 matrix.
resids <- mapply(resAR, list(ar1, ar2, ar3, ar4, ar5))
# NOTE(review): only observations 2:12 (11 residuals) enter each ACF --
# the ACF of 11 points is very noisy; confirm this subsetting is intended.
acf(resids[2:12,1])
acf(resids[2:12,2])
acf(resids[2:12,3])
acf(resids[2:12,4])
acf(resids[2:12,5])
a
# Simulate two MA(1) processes of length 200:
#   ma1: theta = 0.8, shifted to have unconditional mean 1
#   ma2: theta = -0.8, unconditional mean 0
ma1<-arima.sim(list(ma=c(.8)),n=200) + 1
ma2<-arima.sim(list(ma=c(-.8)),n=200)
plot(ma1, type = "l", ylim = c(-5,5))
lines(ma2, col = "red")
# ACF should cut off after lag 1 for an MA(1); PACF decays gradually.
acf(ma1, lag.max=10)
pacf(ma1, lag.max=10)
acf(ma2, lag.max=10)
pacf(ma2, lag.max=10)
a
Therefore, the unconditional mean for ma1 \(= 1\) and the unconditional mean for ma2 \(= 0\)
b
plot(ma1, type = "l", ylim = c(-5,5))
lines(ma2, col = "red")
c
Yes, the series are mean reverting; each reverts to its unconditional mean, which here equals its intercept term.
d
predict(ma1)$mean[1]
## [1] 1.172807
predict(ma2)$mean[1]
## [1] -0.007892871
e
maOneStep <- function(mean, theta, prev) {
  # One-step MA(1) point forecast: the unconditional mean plus theta
  # times the previous term supplied by the caller.
  # NOTE(review): callers pass the last observed value, not the last
  # innovation -- an approximation; verify against the model definition.
  ma_term <- theta * prev
  mean + ma_term
}
maOneStep(1, .8, ma1[200])
## [1] 2.265388
maOneStep(0, -.8, ma2[200])
## [1] 0.6918266
f
maCI <- function(mean, theta, prev, p) {
  # Combine the manual one-step MA(1) point forecast with the interval
  # bounds stored in a forecast-style object `p` (row 1, column 2 of its
  # `upper`/`lower` matrices -- the wider confidence level).
  point <- maOneStep(mean, theta, prev)
  data.frame(
    forecast = point,
    upr = p$upper[1, 2],
    lwr = p$lower[1, 2]
  )
}
maCI(1, .8, ma1[200], predict(ma1))
## forecast upr lwr
## 1 2.265388 3.67773 -1.332117
maCI(0, -.8, ma2[200], predict(ma2))
## forecast upr lwr
## 1 0.6918266 2.738036 -2.753822
g
acf(ma1, lag.max=12)
pacf(ma1, lag.max=12)
acf(ma2, lag.max=12)
pacf(ma2, lag.max=12)
# Load the Chicago house-price workbook; read.xls comes from gdata and
# xts/as.yearmon from the xts/zoo packages (loaded elsewhere) -- TODO confirm.
hpc <- read.xls("hpchicago.xls", skip = 1)
# Keep only the columns used below and drop rows with any missing values.
hpc <- subset(hpc, select = c(YEAR, CHXR, ret_raw, CHXR.SA, ret_sa))
hpc <- hpc[complete.cases(hpc),]
# Index the seasonally-adjusted return series by date for plotting.
hpcXts <- xts(subset(hpc, select = c(ret_sa)), order.by = as.Date(as.yearmon(hpc$YEAR)))
plot(as.xts(hpcXts))
a
ar1 <- arima(hpc$ret_sa, order = c(1,0,0))
summary(ar1)
##
## Call:
## arima(x = hpc$ret_sa, order = c(1, 0, 0))
##
## Coefficients:
## ar1 intercept
## 0.6797 0.0027
## s.e. 0.0419 0.0010
##
## sigma^2 estimated as 3.404e-05: log likelihood = 1169.35, aic = -2332.69
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set -4.631477e-05 0.00583453 0.004084536 -Inf Inf 0.9346362
## ACF1
## Training set -0.1418769
b
# Fit an AR(4) and compare it with the AR(1) above by inspecting the
# ACF/PACF of the raw series and of each model's residuals: the better
# model should leave (close to) white-noise residuals.
ar4 <- arima(hpc$ret_sa, order = c(4,0,0))
acf(hpc$ret_sa)
pacf(hpc$ret_sa)
e4 <- resid(ar4)
acf(e4)
pacf(e4)
e1 <- resid(ar1)
acf(e1)
pacf(e1)
For this problem, I looked at the ACF and PACF plots and compared the autocorrelation and partial autocorrelation of the residuals. Because we do not have a formal test statistic for this comparison, I tried to visually minimize the correlations in the residuals, and the AR(4) was clearly the best.
c
1 - var(resid(ar4)) / var(fitted(ar4) +resid(ar4)) * ((length(fitted(ar4)) - 1) / (length(fitted(ar4)) - 4 - 1))
## [1] 0.4972696
# Adjusted R^2 for the AR(1) model (p = 1 regressor). The original
# copy-pasted length(fitted(ar4)) into this formula; that only gave the
# right answer because both models are fit to the same series -- use the
# AR(1) fit's own length so the expression is self-consistent.
1 - var(resid(ar1)) / var(fitted(ar1) + resid(ar1)) * ((length(fitted(ar1)) - 1) / (length(fitted(ar1)) - 1 - 1))
## [1] 0.4530995
d
par4 <- predict(ar4)
par1 <- predict(ar1)
par4$pred
## Time Series:
## Start = 315
## End = 315
## Frequency = 1
## [1] 0.01165995
par1$pred
## Time Series:
## Start = 315
## End = 315
## Frequency = 1
## [1] 0.01115224
ff <- read.xls("FedFunds.xls")
ff <- rename(x = ff, c("DATE......"="date", "X.FFO"="ffo"))
a
ffXts = xts(x=ff$ffo, order.by=as.Date(ff$date, "%Y-%m-%d"))
plot(as.xts(ffXts))
cat("Sample Mean: ", mean(ff$ffo), "Standard Error: ", sd(ff$ffo)/sqrt(length(ff$ffo)))
## Sample Mean: 4.606986 Standard Error: 0.1430624
b
cat("Sample Variance: ", var(ff$ffo))
## Sample Variance: 4.482243
c
# Inspect the fed-funds series, then fit two candidate models.
acf(ff$ffo)
pacf(ff$ffo)
# NOTE(review): `ar1` here is actually an AR(4) fit (order = c(4,0,0));
# the name shadows the earlier AR(1) objects and is misleading.
ar1 <- arima(ff$ffo, order = c(4,0,0))
arMa1 <- arima(ff$ffo, order = c(1,0,0))
summary(ar1)
##
## Call:
## arima(x = ff$ffo, order = c(4, 0, 0))
##
## Coefficients:
## ar1 ar2 ar3 ar4 intercept
## 1.3600 -0.1496 -0.0807 -0.1431 4.5878
## s.e. 0.0666 0.1137 0.1140 0.0684 0.8454
##
## sigma^2 estimated as 0.03322: log likelihood = 59.29, aic = -106.58
##
## Training set error measures:
## ME RMSE MAE MPE MAPE
## Training set -0.007134788 0.1822499 0.1180297 -0.4360891 3.114063
## MASE ACF1
## Training set 0.8040775 0.004547313
summary(arMa1)
##
## Call:
## arima(x = ff$ffo, order = c(1, 0, 0))
##
## Coefficients:
## ar1 intercept
## 0.9970 5.1829
## s.e. 0.0036 2.5550
##
## sigma^2 estimated as 0.05229: log likelihood = 9.82, aic = -13.65
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set -0.03129723 0.2286739 0.1468658 -1.027559 3.89155 1.000523
## ACF1
## Training set 0.5363346
d
acfPacfResid <- function(model) {
  # Plot the ACF and then the PACF of a fitted model's residuals;
  # invisibly returns the PACF object (the value of the last call).
  residual_series <- resid(model)
  acf(residual_series)
  pacf(residual_series)
}
mapply(acfPacfResid, list(ar1,arMa1))
## [,1] [,2]
## acf Numeric,23 Numeric,23
## type "partial" "partial"
## n.used 219 219
## lag Numeric,23 Numeric,23
## series "resid(model)" "resid(model)"
## snames NULL NULL
e
# Plot a model's fitted values (observed minus residuals) as an xts
# series indexed by the fed-funds dates. Reads the global `ff` data
# frame; `me` is expected to have a `residuals` component (e.g. an
# arima fit) -- TODO confirm.
xtsMe <- function(me){
myXts = xts(x=(ff$ffo - me$residuals), order.by=as.Date(ff$date, "%Y-%m-%d"))
plot(as.xts(myXts))
}
mapply(xtsMe, list(ar1, arMa1))
## [,1] [,2]
## [1,] List,12 List,12
## [2,] Raw,35992 Raw,35992
## [3,] NULL NULL
a
# Redefinition of xtsMe: now plots an arbitrary vector `me` against the
# fed-funds dates (this shadows the earlier fitted-values version, so
# later calls use this variant). Reads the global `ff` for the dates.
xtsMe <- function(me){
myXts = xts(x=me, order.by=as.Date(ff$date, "%Y-%m-%d"))
plot(as.xts(myXts))
}
# In-sample one-step predictions from the AR(4) and AR(1) fits.
ff$ar4Pred <- fitted(ar1)
ff$arMaPred <- fitted(arMa1)
# Trading payoff per period: if the model predicts the rate above the
# futures price, go long (payoff = actual - future); otherwise go short
# (payoff = future - actual). The first observation has no futures quote,
# so it is NA.
# BUG FIX: the original used scalar `if` on a vector condition, so only
# the FIRST element decided the sign for the entire series (R emitted
# "the condition has length > 1" warnings). ifelse() applies the
# comparison element-wise, which is what the strategy requires.
payoff <- function(pred, future, actual) {
  c(NA, ifelse(pred[-1] > future[-1],
               actual[-1] - future[-1],
               future[-1] - actual[-1]))
}
ff$PayoffAr4 <- payoff(ff$ar4Pred, ff$Future, ff$ffo)
ff$PayoffArMa <- payoff(ff$arMaPred, ff$Future, ff$ffo)
mapply(xtsMe, list(ff$PayoffAr4, ff$PayoffArMa))
## [,1] [,2]
## [1,] List,12 List,12
## [2,] Raw,35992 Raw,35992
## [3,] NULL NULL
b
kable(head(ff))
| date | ffo | Future | ar4Pred | arMaPred | PayoffAr4 | PayoffArMa |
|---|---|---|---|---|---|---|
| 1988-12-01 | 8.76 | NA | 8.407434 | 8.481840 | NA | NA |
| 1989-01-01 | 9.12 | 8.80 | 8.819827 | 8.749169 | 0.32 | -0.32 |
| 1989-02-01 | 9.36 | 9.23 | 9.287104 | 9.108078 | 0.13 | -0.13 |
| 1989-03-01 | 9.85 | 9.91 | 9.507284 | 9.347352 | -0.06 | 0.06 |
| 1989-04-01 | 9.84 | 9.86 | 10.067184 | 9.835868 | -0.02 | 0.02 |
| 1989-05-01 | 9.81 | 9.90 | 9.909377 | 9.825898 | -0.09 | 0.09 |
cat("Mean ar4: ", mean(ff$PayoffAr4[-1]), "Sd ar4: ", sd(ff$PayoffAr4[-1]))
## Mean ar4: -0.02252294 Sd ar4: 0.1331835
cat("Mean arMa: ", mean(ff$PayoffArMa[-1]), "Sd arMa: ", sd(ff$PayoffArMa[-1]))
## Mean arMa: 0.02252294 Sd arMa: 0.1331835
c
cat("Test Stat ar4: ",mean(ff$PayoffAr4[-1])/(sd(ff$PayoffAr4[-1])/sqrt(length(ff$PayoffAr4[-1]))))
## Test Stat ar4: -2.49691
cat("Test Stat arMa: ", mean(ff$PayoffArMa[-1])/(sd(ff$PayoffArMa[-1])/sqrt(length(ff$PayoffArMa[-1]))))
## Test Stat arMa: 2.49691
d
d <- ff$PayoffArMa[-1] - ff$PayoffAr4[-1]
mean(d)/(sd(d)/sqrt(length(d)))
## [1] 2.49691
a
# Load price data, sort ascending by date, and drop incomplete rows.
returns <- read.csv("table.csv")
returns$DateTime <- as.Date(returns$Date, "%Y-%m-%d")
returns <- returns[order(returns$DateTime, decreasing=F),]
returns <- returns[complete.cases(returns),]
# NOTE(review): this is log(Close_t) - log(Open_1), i.e. the cumulative
# log return relative to the FIRST open -- confirm that is the intent
# (a per-period return would use each row's own Open).
returns$continuous <- log(returns$Close) - log(returns$Open[1])
plot(returns$continuous, type = "l")
b
# AR(1) fit to the cumulative log-return series; inspect residual
# autocorrelation (the first residual is dropped).
model <- arima(returns$continuous, order = c(1,0,0))
acf(resid(model)[2:20])
pacf(resid(model)[2:20])
c
# MA(1) fit to the same series for comparison; `model` is reused, so
# this overwrites the AR(1) fit above.
model <- arima(returns$continuous, order = c(0,0,1))
acf(resid(model)[2:20])
pacf(resid(model)[2:20])